Preprocessing

library(tidyverse)
library(ggdist)
library(ggside)
library(easystats)
library(patchwork)

# Illusion task, session 1. "Incongruent" is moved to first position so it
# serves as the factor reference level in later contrasts and plots.
illusion1 <- read.csv("../data/raw_illusion1.csv") |>
  mutate(
    Illusion_Effect = fct_relevel(as.factor(Illusion_Effect), "Incongruent", "Congruent"),
    Block = as.factor(Block)
  )

# Illusion task, session 2 (same recoding as session 1).
illusion2 <- read.csv("../data/raw_illusion2.csv") |>
  mutate(
    Illusion_Effect = fct_relevel(as.factor(Illusion_Effect), "Incongruent", "Congruent"),
    Block = as.factor(Block)
  )

# Perceptual task: only Block is recoded here (no congruency relevel).
perceptual <- read.csv("../data/raw_perceptual.csv") |>
  mutate(
    Block = as.factor(Block)
  )

# Questionnaire / demographic data; Education levels ordered from
# high school to doctorate for sensible display order in plots and tables.
sub <- read.csv("../data/raw_questionnaires.csv") |> 
  mutate(
    Education = fct_relevel(Education, 
                            # "Prefer not to Say", 
                            "High school", "Bachelor", "Master", "Doctorate", "Other")
  )


# For prolific:
# bayestestR::point_estimate(sub$Duration_Session1)
# plot(bayestestR::estimate_density(sub$Duration_Session1))

Outlier Detection (Tasks)

# Dear participant, thank you for participating in our study. Unfortunately, our system detected multiple issues in your data (such as implausibly short responses - in particular in the 2nd part of the study, random-like pattern of answers, or the same response to different scales - as some were designed to trigger various answers), which makes it unusable. We understand that you might have been in a hurry or had some other issues, and so we kindly ask you to return your participation; we hope to open up more slots in the future should you be interested in participating again. 

# Dear participant, thank you for participating in our study. Unfortunately, our system detected multiple issues in your data (such as implausibly short responses - in particular in the 2nd part of the study, random-like pattern of answers, or the same response to different scales - as some were designed to trigger various answers), which makes it unusable for us. We understand that you might have been in a hurry or had some other issues; we hope to open up more slots in the future should you be interested in participating again. 

# Participants excluded per task, identified from the descriptive table
# below (near-chance error rates and/or implausibly fast RT distributions).
outliers_perceptual <- c("S003", "S008")
outliers_illusion1 <- c("S008")
# S153: only the second block of illusion session 1 is excluded.
outliers_illusion1_block2 <- c("S153")
outliers_illusion2 <- c("S003")

We removed 1, 2, and 1 participants for the illusion task - session 1, perceptual task, and illusion task - session 2 respectively, upon inspection of the average error rate (when close to 50%, suggesting random answers) and/or when the reaction time distribution was implausibly fast.

Descriptive Table

# Combine the three tasks and drop implausibly long trials (RT >= 10 s).
data <- rbind(illusion1, illusion2, perceptual) |>
  filter(RT < 10) |>
  mutate(
    Participant = fct_rev(Participant),
    Task = fct_relevel(Task, "Illusion_Session1", "Perceptual", "Illusion_Session2")
  )

# Per participant x task summary (error rate, mean RT), reshaped to one row
# per participant with task-specific columns; Session2 columns moved last.
table <- data |>
  group_by(Participant, Task) |>
  summarize(
    Error = sum(Error) / n(),
    RT = mean(RT)
  ) |>
  ungroup() |>
  arrange(desc(Error)) |>
  tidyr::pivot_wider(names_from = "Task", values_from = c("Error", "RT"), names_vary = "slowest") |>
  datawizard::data_relocate(ends_with("Session2"), after = -1) |>
  arrange(desc(Error_Illusion_Session1))
# Prepend a column-wise "Average" row, then render: task outliers in red,
# block-2-only outliers in orange (the "+ 1" offsets for the Average row).
data.frame(Participant = c("Average"), t(sapply(table[2:ncol(table)], mean, na.rm = TRUE))) |>
  rbind(table) |>
  knitr::kable() |>
  kableExtra::row_spec(1, italic = TRUE, background = "grey", color = "white") |>
  kableExtra::row_spec(which(table$Participant %in% c(outliers_perceptual, outliers_illusion1, outliers_illusion2)) + 1, background = "#EF9A9A") |>
  kableExtra::row_spec(which(table$Participant %in% c(outliers_illusion1_block2)) + 1, background = "orange") |>
  # kableExtra::column_spec(2, color="white",
  #                         background = kableExtra::spec_color(c(NA, table$Error_Illusion_Session1))) |>
  kableExtra::kable_styling(full_width = TRUE) |>
  kableExtra::scroll_box(width = "100%", height = "500px")
Participant Error_Perceptual RT_Perceptual Error_Illusion_Session1 RT_Illusion_Session1 RT_Illusion_Session2 Error_Illusion_Session2
Average 0.071 0.678 0.192 0.760 0.731 0.265
S008 0.500 0.347 0.417 0.610
S110 0.323 2.553 0.391 1.519
S124 0.167 0.606 0.383 0.708
S153 0.109 0.699 0.370 0.575
S002 0.115 0.872 0.346 0.713 0.945 0.396
S023 0.125 0.619 0.346 0.631
S070 0.149 1.014 0.314 1.118
S117 0.182 0.412 0.305 0.490
S146 0.042 0.709 0.297 0.619
S151 0.177 0.542 0.292 0.570
S144 0.146 0.691 0.289 0.737
S080 0.120 0.532 0.286 0.477
S132 0.135 0.446 0.284 0.469
S178 0.146 0.507 0.281 0.542
S135 0.125 0.504 0.279 0.516
S087 0.151 0.589 0.279 0.576
S152 0.078 0.549 0.266 0.947
S174 0.062 0.730 0.263 0.617
S167 0.193 0.591 0.263 0.665
S021 0.250 0.703 0.263 0.544
S059 0.021 0.488 0.260 0.542
S073 0.104 0.596 0.258 0.665
S006 0.089 0.620 0.258 0.529 0.723 0.216
S154 0.141 0.528 0.254 0.615
S173 0.062 0.760 0.253 0.754
S140 0.068 0.636 0.253 0.591
S136 0.078 0.528 0.253 0.549
S123 0.083 0.665 0.253 0.815
S166 0.120 0.472 0.250 0.520
S129 0.062 0.572 0.250 0.674
S094 0.089 0.514 0.250 0.602
S162 0.016 0.595 0.247 0.509
S133 0.099 0.583 0.247 0.493
S112 0.073 0.492 0.247 0.490
S053 0.151 0.475 0.245 0.511
S048 0.052 0.502 0.242 0.502
S101 0.177 0.406 0.240 0.427
S156 0.089 0.524 0.237 0.665
S025 0.073 0.598 0.237 0.713
S007 0.089 0.770 0.234 0.894
S177 0.130 0.795 0.233 0.701
S163 0.047 0.738 0.232 0.658
S036 0.068 0.749 0.232 0.845
S083 0.042 0.561 0.229 0.588
S063 0.042 0.797 0.228 0.885
S014 0.047 0.548 0.227 0.641
S103 0.036 0.562 0.224 0.613
S097 0.068 0.528 0.224 0.791
S019 0.052 0.496 0.224 0.618
S046 0.073 0.553 0.221 0.620
S042 0.161 0.493 0.221 0.560
S139 0.052 0.537 0.219 0.605
S109 0.135 0.641 0.219 0.736
S092 0.036 0.810 0.217 1.202
S157 0.057 0.521 0.216 0.571
S079 0.052 0.588 0.216 0.586
S131 0.089 0.584 0.214 0.572
S102 0.120 0.526 0.214 0.514
S003 0.214 0.441 0.211 0.853 0.331 0.371
S107 0.036 0.722 0.211 0.808
S064 0.073 0.714 0.211 0.885
S128 0.115 0.603 0.208 0.596
S093 0.057 0.512 0.208 0.516
S043 0.057 0.823 0.208 0.688
S175 0.109 1.099 0.206 1.272
S145 0.099 0.498 0.206 0.541
S086 0.099 0.717 0.206 0.726
S116 0.104 0.635 0.203 0.760
S113 0.073 0.643 0.203 0.758
S108 0.089 0.569 0.203 0.715
S096 0.094 0.473 0.203 0.554
S172 0.057 0.470 0.201 0.567
S105 0.052 0.525 0.201 0.617
S100 0.083 0.730 0.201 0.740
S066 0.031 0.728 0.201 0.859
S001 0.323 0.946 0.199 0.986
S130 0.021 0.606 0.195 0.660
S125 0.068 0.563 0.195 0.605
S052 0.031 0.541 0.195 0.537
S122 0.089 0.556 0.193 0.621
S085 0.177 0.637 0.193 0.824
S047 0.094 0.624 0.191 0.823
S072 0.057 0.960 0.190 0.925
S082 0.125 0.410 0.188 0.470
S067 0.099 0.550 0.188 0.560
S149 0.062 0.595 0.185 0.638
S126 0.062 0.498 0.185 0.743
S120 0.115 0.656 0.185 0.816
S115 0.089 0.480 0.185 0.703
S111 0.057 0.798 0.185 1.007
S150 0.031 1.134 0.182 1.545
S137 0.042 0.524 0.182 0.722
S104 0.073 0.522 0.182 0.656
S026 0.036 0.732 0.182 0.934
S017 0.068 0.534 0.180 0.606
S121 0.052 0.480 0.180 0.508
S040 0.026 0.829 0.180 0.929
S089 0.031 0.603 0.178 0.888
S044 0.016 0.642 0.177 0.665
S035 0.099 0.497 0.177 0.523
S030 0.068 0.546 0.177 0.718
S013 0.062 0.563 0.177 0.706
S161 0.115 0.562 0.174 0.626
S076 0.031 0.671 0.174 0.655
S034 0.099 0.645 0.174 0.792
S022 0.078 0.500 0.174 0.601
S158 0.073 0.575 0.172 0.631
S065 0.021 0.605 0.172 0.811
S062 0.042 0.641 0.172 0.708
S181 0.062 0.736 0.167 0.746
S143 0.036 0.770 0.167 0.972
S038 0.115 0.618 0.167 0.691
S033 0.083 0.698 0.167 0.752
S024 0.042 0.756 0.167 0.735
S020 0.073 0.833 0.167 0.820
S050 0.016 0.714 0.164 1.025
S009 0.010 0.633 0.164 0.833
S054 0.057 0.535 0.161 0.576
S039 0.083 0.517 0.161 0.524
S004 0.010 0.800 0.161 1.051
S127 0.021 1.066 0.160 1.396
S011 0.042 0.657 0.159 0.755 0.687 0.185
S071 0.016 0.836 0.159 0.748
S114 0.042 0.528 0.156 0.571
S068 0.052 0.624 0.156 1.011
S032 0.031 0.585 0.154 0.550
S119 0.021 1.564 0.149 1.968
S165 0.021 0.679 0.148 0.908
S155 0.042 0.838 0.148 0.688
S147 0.016 0.682 0.148 0.946
S088 0.026 0.878 0.148 1.056
S077 0.057 1.019 0.148 0.884
S074 0.052 0.533 0.146 0.730
S058 0.036 0.596 0.146 0.609
S057 0.021 0.562 0.146 0.645
S084 0.000 0.966 0.143 0.932
S164 0.010 0.629 0.141 0.512
S142 0.083 0.485 0.141 0.518
S045 0.010 0.912 0.141 0.730
S179 0.026 0.725 0.138 0.738
S168 0.078 0.646 0.138 0.631
S160 0.057 0.526 0.138 0.640
S138 0.031 0.629 0.138 0.888
S118 0.005 0.974 0.135 1.150
S015 0.047 0.643 0.135 0.580
S098 0.021 0.849 0.134 0.993
S148 0.062 0.778 0.133 0.764
S095 0.021 0.931 0.133 1.531
S180 0.031 0.616 0.130 0.742
S171 0.036 0.856 0.130 1.159
S091 0.031 0.561 0.130 0.673
S081 0.021 0.718 0.130 0.733
S075 0.068 0.890 0.130 1.089
S060 0.042 0.695 0.130 0.947
S041 0.052 0.672 0.130 0.751
S051 0.016 0.569 0.128 0.753
S037 0.047 0.756 0.128 0.965
S018 0.052 0.682 0.128 0.816
S016 0.021 0.605 0.128 0.678
S061 0.021 0.593 0.126 1.281
S141 0.062 0.572 0.125 0.657
S055 0.057 0.553 0.125 0.756
S031 0.016 0.669 0.125 0.780
S005 0.000 0.999 0.125 0.877
S010 0.068 1.439 0.122 1.152 0.968 0.159
S099 0.016 0.696 0.122 0.691
S027 0.010 0.706 0.122 0.730
S134 0.021 0.635 0.120 0.688
S169 0.036 1.431 0.117 1.828
S028 0.052 0.775 0.117 0.887
S176 0.016 0.606 0.115 1.080
S106 0.016 0.543 0.115 0.581
S056 0.052 0.568 0.115 0.664
S159 0.036 0.724 0.109 0.738
S069 0.010 0.859 0.107 0.893
S078 0.000 1.016 0.104 1.021
S090 0.026 0.610 0.096 0.809
S049 0.042 0.990 0.096 0.968
S170 0.032 0.891 0.094 0.970
S029 0.068 0.875 0.094 0.924
S012 0.016 0.699 0.094 0.928

Reaction Time Distribution

# Per-participant RT density curves by task, overlaid on the pooled RT
# density (grey area). Outlier participants are colored red (whole task)
# or orange (block-2-only exclusion); everyone else blue. The dashed red
# line marks the 125 ms fast-response cutoff.
plot_distribution <- function(data) {
  data |>
    estimate_density(select = "RT", at = c("Participant", "Task", "Block")) |>
    group_by(Participant) |>
    # Rescale each participant's density to a common [0, 1] range so the
    # faceted curves are visually comparable.
    normalize(select = "y") |>
    ungroup() |>
    mutate(
      # Participant = fct_relevel(Participant, as.character(table$Participant)),
      color = case_when(
        Participant %in% outliers_perceptual & Task == "Perceptual" ~ "red",
        Participant %in% outliers_illusion1 & Task == "Illusion_Session1" ~ "red",
        Participant %in% outliers_illusion2 & Task == "Illusion_Session2" ~ "red",
        Participant %in% outliers_illusion1_block2 & Task == "Illusion_Session1" ~ "orange",
        TRUE ~ "blue"
      ),
      # Human-readable facet labels.
      Task = fct_recode(Task,
        "Illusion task (session 1)" = "Illusion_Session1",
        "Illusion task (session 2)" = "Illusion_Session2",
        "Perceptual task" = "Perceptual"
      )
    ) |>
    ggplot(aes(x = x, y = y)) +
    geom_area(data = normalize(estimate_density(data, select = "RT"), select = "y"), alpha = 0.2) +
    geom_line(aes(color = color, group = interaction(Participant, Block), linetype = Block), size = 0.5) +
    geom_vline(xintercept = 0.125, linetype = "dashed", color = "red", size = 0.5) +
    scale_color_manual(values = c("red" = "#F44336", "orange" = "#FF9800", "blue" = "blue"), guide = "none") +
    scale_x_continuous(expand = c(0, 0)) +
    scale_y_continuous(expand = c(0, 0)) +
    coord_cartesian(xlim = c(0, 2)) +
    theme_modern() +
    theme(axis.text.y = element_blank()) +
    facet_grid(Participant ~ Task) +
    labs(y = "", x = "Reaction Time (s)")
}
# p1 <- data |> 
#   filter(as.numeric(gsub("\\D", "", Participant)) <= 51) |>
#   plot_distribution()
# p2 <- data |> 
#   filter((as.numeric(gsub("\\D", "", Participant)) > 51 & (as.numeric(gsub("\\D", "", Participant)) <= 101) | (as.numeric(gsub("\\D", "", Participant)) == 1)) |>
#   plot_distribution()
# Plot the last batch of participants (numeric ID > 140, plus S001 as a
# reference); p1/p2 (commented above) cover the earlier batches.
p3 <- data |> 
  filter((as.numeric(gsub("\\D", "", Participant)) > 140) | (as.numeric(gsub("\\D", "", Participant)) == 1)) |>
  plot_distribution() 
p3
ggsave("figures/00_outliers_RT.png", p3, width = 8, height = 24, dpi = 100)
# FIX: include the file that ggsave() actually wrote; the original pointed
# at "figures/outliers_RT.png", which is never created.
knitr::include_graphics("figures/00_outliers_RT.png")

# Remove the flagged participants: whole-task outliers entirely, and
# S153's second block only (block-level exclusion for illusion session 1).
illusion1 <- filter(illusion1, !Participant %in% outliers_illusion1)
illusion1 <- filter(illusion1, !(Participant %in% outliers_illusion1_block2 & Block == 2))
illusion2 <- filter(illusion2, !Participant %in% outliers_illusion2)
perceptual <- filter(perceptual, !Participant %in% outliers_perceptual)

Outliers Detection (Blocks)

For each block, we computed the error rate and, if more than 50%, we discarded the whole block (as it likely indicates that instructions got mixed up, for instance participants were selecting the smaller instead of the bigger circle).

# Per-block error rates: blocks above 50% errors suggest inverted
# instructions and are dropped wholesale by remove_badblocks() below.
data <- rbind(illusion1, illusion2, perceptual) |>
  group_by(Participant, Task, Illusion_Type, Block) |>
  summarize(ErrorRate_per_block = sum(Error) / n()) |>
  ungroup() |>
  arrange(desc(ErrorRate_per_block))


# Density of block-level error rates per task / illusion type / block;
# the dashed vertical line marks the 50% exclusion threshold.
data |>
  estimate_density(at = c("Task", "Illusion_Type", "Block"), method = "KernSmooth") |>
  ggplot(aes(x = x, y = y)) +
  geom_line(aes(color = Illusion_Type, linetype = Block)) +
  geom_vline(xintercept = 0.5, linetype = "dashed") +
  scale_x_continuous(expand = c(0, 0)) +
  scale_y_continuous(expand = c(0, 0)) +
  scale_color_manual(values = c("Ebbinghaus" = "#2196F3", "MullerLyer" = "#4CAF50", "VerticalHorizontal" = "#FF5722")) +
  labs(y = "Distribution", x = "Error Rate") +
  theme_modern() +
  facet_wrap(~Task)

# Drop all trials belonging to "bad" blocks, i.e. participant x illusion x
# block cells with an error rate of 50% or more (likely inverted
# instructions). Returns a list with the cleaned data and a summary string.
remove_badblocks <- function(df) {
  n_before <- nrow(df)

  cleaned <- df |>
    group_by(Participant, Illusion_Type, Block) |>
    mutate(ErrorRate_per_block = sum(Error) / n()) |>
    ungroup() |>
    filter(ErrorRate_per_block < 0.5) |>
    select(-ErrorRate_per_block)

  n_removed <- n_before - nrow(cleaned)
  text <- paste0(
    "We removed ", n_removed, " (",
    insight::format_value(n_removed / n_before, as_percent = TRUE),
    ") trials belonging to bad blocks."
  )

  list(data = cleaned, text = text)
}

# Apply the block-level exclusion to each dataset and report what was
# removed. (The bracketed lines below are the knitted console output.)
out <- remove_badblocks(illusion1)
print(paste("Illusion (session 1):", out$text))

[1] “Illusion (session 1): We removed 384 (0.56%) trials belonging to bad blocks.”

illusion1 <- out$data

out <- remove_badblocks(illusion2)
print(paste("Illusion (session 2):", out$text))

[1] “Illusion (session 2): We removed 64 (4.17%) trials belonging to bad blocks.”

illusion2 <- out$data

out <- remove_badblocks(perceptual)
print(paste("Perceptual task:", out$text))

[1] “Perceptual task: We removed 224 (0.65%) trials belonging to bad blocks.”

perceptual <- out$data

Outliers Detection (Trials)

Reaction Time per Trial

# Flag and remove outlier trials within one dataset. A trial is an outlier
# when RT >= 10 s, RT < 125 ms, or the participant-wise robustly
# standardized RT (MAD-based z-score) exceeds 4.
# Returns a list: p (stacked diagnostic plots), data (cleaned df),
# text (summary sentence for the manuscript).
check_trials <- function(df) {
  data <- df |>
    mutate(Outlier = ifelse(RT >= 10, TRUE, FALSE)) |>
    group_by(Participant) |>
    mutate(Outlier = ifelse(RT < 0.125 | standardize(RT, robust = TRUE) > 4, TRUE, Outlier)) |>
    ungroup()

  # p1: per-participant RT densities (normalized to [0, 1]) over the pooled
  # density, with the region beyond each participant's correct-trial
  # threshold (median + 4 * MAD) drawn in a different linetype.
  p1 <- data |>
    filter(RT < 10) |>
    estimate_density(select = "RT", at = "Participant") |>
    group_by(Participant) |>
    normalize(select = "y") |>
    ungroup() |>
    merge(data |>
      group_by(Participant) |>
      mutate(Threshold = median(RT) + 4 * mad(RT)) |>
      filter(Error == 0) |>
      summarize(Threshold = mean(Threshold))) |>
    mutate(Outlier = ifelse(x >= Threshold, TRUE, FALSE)) |>
    ggplot(aes(x = x, y = y)) +
    geom_area(data = normalize(estimate_density(filter(data, RT < 10), select = "RT"), select = "y"), alpha = 0.2) +
    geom_line(aes(color = Participant, linetype = Outlier), alpha = 0.2) +
    # FIX: RT is in seconds, so the fast-response cutoff is 0.125, not 125
    # (the original value sat far outside the 0-5 s axis and never showed;
    # compare the 0.125 cutoff used in plot_distribution()).
    geom_vline(xintercept = c(0.125), linetype = "dashed", color = "red") +
    scale_color_material_d("rainbow", guide = "none") +
    scale_x_continuous(expand = c(0, 0)) +
    scale_y_continuous(expand = c(0, 0)) +
    guides(linetype = "none") +
    coord_cartesian(xlim = c(0, 5)) +
    theme_modern() +
    theme(axis.text.y = element_blank()) +
    labs(y = "", x = "Reaction Time (s)")

  # p2: proportion of outlier trials per participant.
  p2 <- data |>
    group_by(Participant) |>
    # FIX: divide by the participant's own trial count n(). The original
    # divided by nrow(illusion1) — a global unrelated to df — which gave
    # wrong proportions for every dataset (and nonsense for illusion2 /
    # perceptual).
    summarize(Outlier = sum(Outlier) / n()) |>
    mutate(Participant = fct_reorder(Participant, Outlier)) |>
    ggplot(aes(x = Participant, y = Outlier)) +
    geom_bar(stat = "identity", aes(fill = Participant)) +
    scale_fill_material_d("rainbow", guide = "none") +
    scale_x_discrete(expand = c(0, 0)) +
    scale_y_continuous(expand = c(0, 0), labels = scales::percent) +
    see::theme_modern() +
    theme(axis.text.x = element_blank()) +
    labs(y = "Percentage of outlier trials")

  text <- paste0(
    "We removed ",
    sum(data$Outlier),
    " (",
    insight::format_value(sum(data$Outlier) / nrow(data), as_percent = TRUE),
    ") outlier trials (125 ms < RT < 4 MAD above median)."
  )

  data <- filter(data, Outlier == FALSE)
  data$Outlier <- NULL

  list(p = p1 / p2, data = data, text = text)
}

Illusion Task (Session 1)

# Trial-level cleaning per dataset; out$p shows the diagnostic plots and
# the bracketed lines are the knitted console output.
out <- check_trials(illusion1)
print(paste("Illusion (session 1):", out$text))

[1] “Illusion (session 1): We removed 2493 (3.64%) outlier trials (125 ms < RT < 4 MAD above median).”

out$p

illusion1 <- out$data

Illusion Task (Session 2)

out <- check_trials(illusion2)
print(paste("Illusion (session 2):", out$text))

[1] “Illusion (session 2): We removed 49 (3.33%) outlier trials (125 ms < RT < 4 MAD above median).”

out$p

illusion2 <- out$data

Perceptual Task

out <- check_trials(perceptual)
print(paste("Perceptual task:", out$text))

[1] “Perceptual task: We removed 1574 (4.61%) outlier trials (125 ms < RT < 4 MAD above median).”

out$p

perceptual <- out$data

Outlier Detection (Questionnaires)

Multivariate Detection

We compute a multivariate outlier score.

# Multivariate outlier scoring on attention checks, per-questionnaire
# completion times (_RT) and response variability (_SD), combining four
# detection methods (see performance::check_outliers for the composite
# score semantics).
outliers <- sub |> 
  select(AttentionCheck_Session1, 
         IPIP6_RT, PID5_RT, GCBS_RT, ASQ_RT, LIE_RT, SPQ_RT, 
         IPIP6_SD, PID5_SD, GCBS_SD, ASQ_SD, LIE_SD, SPQ_SD, 
         PHQ4_SD) |> 
  standardize() |> 
  performance::check_outliers(method = c("mahalanobis", "mahalanobis_robust", "mcd", "ics"))

# Store the per-participant outlier score for the inspection table below.
sub$Potential_Outliers <- as.data.frame(outliers)$Outlier
# outliers

Manual Check

# Participants whose questionnaire data is discarded after manual
# inspection (attention checks, completion times).
outliers_questionnaires <- c("S003", "S008", "S068", "S110", "S127")

We removed the questionnaire data from 5 participants upon inspection of attention checks and the time taken to complete each questionnaire.

# Build the questionnaire-inspection table: per-participant count of tasks
# in which they were outliers, the multivariate outlier score, attention
# checks, and questionnaire completion times (_RT) / response SDs (_SD).
table <- sub |>
  mutate(
    Outlier_Task1 = Participant %in% outliers_illusion1,
    Outlier_Task2 = Participant %in% outliers_illusion2,
    Outlier_Task3 = Participant %in% outliers_perceptual,
    # TRUE/FALSE sum arithmetically, giving a 0-3 count of flagged tasks.
    Outlier_Tasks = Outlier_Task1 + Outlier_Task2 + Outlier_Task3
  ) |>
  select(
    Participant,
    Outlier_Tasks,
    Potential_Outliers,
    AttentionCheck_Session1,
    IPIP6_RT, PID5_RT, ASQ_RT, SPQ_RT,
    IPIP6_SD, PID5_SD, PHQ4_SD,
    AttentionCheck_Session2,
    BPD_RT, MAIA_RT, PI_RT,
    BPD_SD, MAIA_SD, PI_SD
  ) |> 
  # mutate(across(ends_with("IPIP6_RT") | ends_with("IPIP6_SD"), standardize)) |> 
  # arrange(desc(Outlier_Tasks), AttentionCheck_Session1) 
  arrange(desc(Participant))
# Prepend a column-wise "Average" row and start the kable; manually flagged
# participants are highlighted red. Note: the variable `t` shadows
# base::t(), but the transpose call t(...) on this line still resolves to
# the function.
t <- data.frame(Participant = c("Average"), t(sapply(table[2:ncol(table)], mean, na.rm = TRUE))) |>
  rbind(table) |>
  knitr::kable() |>
  kableExtra::row_spec(1, italic = TRUE) |>
  kableExtra::row_spec(which(table$Participant %in% outliers_questionnaires) + 1, background = "#EF9A9A")

# Color every numeric column by value (viridis palette "D"); the leading NA
# in c(NA, ...) keeps the "Average" row's cell white via na_color.
for (i in 2:ncol(table)) {
  t <- kableExtra::column_spec(
    t, i,
    color = "white",
    background = kableExtra::spec_color(
      c(NA, table[[i]]),
      option = "D",
      alpha = 1,
      # direction = ifelse(str_detect(names(table)[i], "_SD|Outlier"), 1, -1),
      # FIX: dropped the trailing comma that was left behind when the
      # `direction` argument above was commented out — R treats a trailing
      # comma in a call as an empty argument and errors.
      na_color = "white"
    )
  )
}


# Render the colored table: grey Average row, compact font, scrollable box.
t  |>
  kableExtra::row_spec(1, background = "grey") |> 
  kableExtra::kable_styling(full_width = TRUE, font_size = 9) |>
  kableExtra::scroll_box(width = "100%", height = "500px")
Participant Outlier_Tasks Potential_Outliers AttentionCheck_Session1 IPIP6_RT PID5_RT ASQ_RT SPQ_RT IPIP6_SD PID5_SD PHQ4_SD AttentionCheck_Session2 BPD_RT MAIA_RT PI_RT BPD_SD MAIA_SD PI_SD
Average 0.022 0.145 0.963 1.792 2.500 2.561 3.11 0.196 0.694 0.432 0.656 1.052 2.99 3.55 0.241 0.129 0.701
S181 0.000 0.250 0.964 1.938 3.761 3.473 4.39 0.224 0.797 0.000
S180 0.000 0.000 0.950 1.185 2.342 1.426 2.30 0.064 0.462 0.354
S179 0.000 0.500 1.000 2.990 2.102 2.797 4.54 0.154 0.358 0.707
S178 0.000 0.000 1.000 1.321 1.784 1.567 2.55 0.194 0.834 0.354
S177 0.000 0.500 0.926 1.667 3.152 5.510 3.46 0.232 0.629 0.000
S176 0.000 0.000 0.964 2.410 3.243 3.747 4.32 0.272 0.796 0.000
S175 0.000 0.750 0.893 2.497 2.310 3.097 6.32 0.202 0.739 1.061
S174 0.000 0.500 0.964 1.572 2.908 2.124 7.36 0.083 0.663 0.707
S173 0.000 0.000 1.000 1.561 2.609 2.147 3.12 0.246 0.724 0.000
S172 0.000 0.000 0.857 1.778 2.397 2.265 2.90 0.318 0.859 1.061
S171 0.000 0.000 0.929 1.335 2.256 2.601 2.72 0.149 0.487 0.354
S170 0.000 0.500 1.000 2.818 6.842 5.090 5.71 0.293 1.037 0.707
S169 0.000 0.000 1.000 2.534 2.809 3.751 5.20 0.236 0.854 0.707
S168 0.000 0.000 1.000 1.916 2.226 1.748 2.16 0.184 0.545 0.000
S167 0.000 0.000 1.000 0.813 1.325 1.300 2.20 0.249 1.127 0.707
S166 0.000 0.000 1.000 0.926 1.113 1.126 1.46 0.162 0.635 0.000
S165 0.000 0.000 1.000 1.699 2.933 2.793 3.06 0.196 0.908 0.707
S164 0.000 0.000 1.000 1.006 1.283 1.474 1.79 0.124 0.467 0.000
S163 0.000 0.500 1.000 1.618 2.576 6.214 3.05 0.122 0.719 0.354
S162 0.000 0.000 0.973 2.022 2.397 2.974 3.50 0.132 0.557 0.000
S161 0.000 0.500 1.000 2.168 3.121 3.229 5.05 0.301 0.961 0.354
S160 0.000 0.000 1.000 1.608 3.534 1.646 2.24 0.154 0.692 0.000
S159 0.000 0.000 0.964 0.970 1.526 1.697 2.03 0.144 0.687 0.000
S158 0.000 0.000 1.000 1.553 1.578 1.817 2.29 0.129 0.366 0.000
S157 0.000 0.000 1.000 1.395 2.264 2.050 3.38 0.137 0.585 0.354
S156 0.000 0.000 1.000 1.118 1.418 1.802 1.84 0.207 0.737 0.354
S155 0.000 0.250 1.000 1.599 2.294 2.304 3.29 0.329 0.671 1.768
S154 0.000 0.000 1.000 0.979 1.271 1.328 1.87 0.185 0.637 0.354
S153 0.000 0.500 0.964 1.959 1.466 1.382 2.98 0.160 1.068 1.061
S152 0.000 0.750 1.000 1.255 2.559 8.312 2.17 0.349 0.903 0.707
S151 0.000 0.750 0.787 6.055 1.806 7.617 1.06 0.199 0.655 1.414
S150 0.000 0.000 0.929 1.602 1.898 2.199 2.76 0.247 0.758 0.354
S149 0.000 0.000 1.000 1.038 1.291 1.402 1.80 0.117 0.724 0.354
S148 0.000 0.750 1.090 3.082 1.542 11.678 5.33 0.167 1.056 0.354
S147 0.000 0.000 1.000 2.631 2.598 3.734 4.46 0.093 0.644 0.354
S146 0.000 0.250 1.000 0.877 1.449 1.356 1.82 0.157 1.108 0.707
S145 0.000 0.000 1.000 1.724 2.164 2.095 3.42 0.188 0.785 0.707
S144 0.000 0.000 1.000 1.564 3.449 2.245 2.95 0.303 0.561 0.354
S143 0.000 0.000 0.964 1.230 1.597 1.831 2.76 0.176 0.638 0.000
S142 0.000 0.000 1.000 1.166 1.522 1.615 2.26 0.265 0.888 0.354
S141 0.000 0.000 0.929 1.679 3.655 2.994 3.88 0.223 0.979 1.414
S140 0.000 0.000 1.000 2.317 2.107 2.418 2.46 0.203 0.585 0.707
S139 0.000 0.000 0.857 2.020 2.878 2.427 3.71 0.252 0.823 1.061
S138 0.000 0.000 1.000 1.607 2.167 2.391 3.09 0.099 0.450 0.000
S137 0.000 0.000 1.000 2.415 3.982 3.379 4.71 0.201 0.802 0.707
S136 0.000 0.000 1.000 1.253 1.406 1.455 1.59 0.212 0.687 0.354
S135 0.000 0.000 1.000 1.269 1.598 1.561 1.91 0.147 0.626 1.061
S134 0.000 0.000 1.000 1.375 1.852 1.804 2.32 0.185 0.656 0.000
S133 0.000 0.000 1.000 1.079 1.699 1.365 1.16 0.164 0.398 0.354
S132 0.000 0.000 0.857 1.508 1.876 2.020 2.66 0.143 0.667 0.354
S131 0.000 0.000 0.964 1.548 3.060 2.143 3.26 0.174 0.765 0.707
S130 0.000 0.000 0.856 1.389 1.623 2.126 3.08 0.246 0.664 0.354
S129 0.000 0.000 1.000 2.034 1.930 2.243 3.10 0.235 0.727 0.707
S128 0.000 0.000 0.944 1.295 1.707 2.099 2.27 0.071 0.724 0.000
S127 0.000 0.750 0.750 1.872 14.156 2.988 3.55 0.260 0.768 0.354
S126 0.000 0.000 0.999 0.958 1.471 2.260 3.92 0.161 0.557 1.061
S125 0.000 0.000 1.000 0.904 1.103 0.945 1.36 0.122 0.629 0.707
S124 0.000 0.000 1.000 1.127 1.696 1.590 3.56 0.212 0.803 0.707
S123 0.000 0.000 0.964 1.012 1.659 2.681 2.07 0.189 0.467 0.707
S122 0.000 0.500 0.964 2.966 3.771 4.138 2.75 0.216 0.981 0.354
S121 0.000 0.500 1.000 1.091 2.171 3.481 2.52 0.101 0.571 0.354
S120 0.000 0.000 1.000 2.460 4.160 2.681 2.98 0.194 0.528 0.000
S119 0.000 0.500 0.964 3.599 3.987 5.267 5.46 0.122 0.447 0.000
S118 0.000 0.000 1.000 1.461 2.936 2.002 2.38 0.156 0.539 0.000
S117 0.000 0.250 0.893 2.431 1.215 1.241 2.59 0.253 0.864 0.707
S116 0.000 0.000 0.964 1.238 1.922 2.067 2.46 0.207 0.687 0.354
S115 0.000 0.000 1.000 1.634 1.648 1.923 2.45 0.135 0.728 0.000
S114 0.000 0.000 0.977 0.964 1.467 1.984 2.20 0.137 0.742 1.061
S113 0.000 0.500 1.000 2.235 2.564 4.278 3.45 0.164 0.692 0.000
S112 0.000 0.000 1.000 1.542 1.687 1.797 2.86 0.239 0.816 0.354
S111 0.000 0.000 1.000 1.346 2.048 1.928 2.28 0.225 0.908 0.000
S110 0.000 0.750 0.730 0.500 1.035 0.625 1.05 0.216 0.956 0.707
S109 0.000 0.000 0.929 1.942 2.720 2.470 3.27 0.212 0.984 0.707
S108 0.000 0.000 1.000 1.043 1.789 1.661 2.62 0.200 0.687 0.707
S107 0.000 0.000 1.000 1.123 1.424 1.703 2.58 0.277 0.814 0.707
S106 0.000 0.000 1.000 1.551 1.918 2.582 3.23 0.263 0.920 0.000
S105 0.000 0.500 1.000 1.146 1.703 3.678 2.71 0.101 0.838 0.707
S104 0.000 0.000 0.874 1.208 2.154 1.813 1.56 0.169 0.727 0.707
S103 0.000 0.000 1.000 1.111 2.064 1.916 2.65 0.077 0.557 0.000
S102 0.000 0.000 0.970 1.148 2.116 1.962 2.56 0.166 0.819 0.707
S101 0.000 0.000 1.000 1.567 1.772 2.167 2.59 0.189 0.309 0.354
S100 0.000 0.000 1.000 1.696 2.091 1.793 2.91 0.141 0.577 0.000
S099 0.000 0.750 0.886 2.410 2.758 2.462 5.87 0.391 0.724 0.000
S098 0.000 0.000 1.000 1.644 1.681 1.968 3.40 0.207 0.957 0.707
S097 0.000 0.000 1.000 1.009 1.301 1.626 1.79 0.118 0.410 0.354
S096 0.000 0.000 1.000 1.673 1.483 1.753 2.44 0.350 0.924 1.414
S095 0.000 0.000 1.000 1.345 1.592 1.836 1.82 0.150 0.774 0.707
S094 0.000 0.000 0.964 1.700 2.307 2.572 4.05 0.185 0.855 0.000
S093 0.000 0.000 1.000 1.897 2.166 2.882 3.54 0.135 0.724 0.000
S092 0.000 0.000 1.000 1.178 2.504 2.125 2.87 0.200 0.814 0.354
S091 0.000 0.000 0.893 0.852 1.412 1.357 2.73 0.090 0.707 0.707
S090 0.000 0.000 1.000 1.984 2.113 2.294 3.32 0.246 0.618 0.354
S089 0.000 0.750 0.891 6.854 2.193 2.354 2.85 0.201 0.545 0.354
S088 0.000 0.000 1.000 2.068 2.503 3.000 4.07 0.237 0.410 0.000
S087 0.000 0.000 1.000 1.566 4.313 2.873 2.85 0.196 0.652 0.707
S086 0.000 0.000 0.924 2.013 2.597 2.479 3.68 0.292 0.834 0.000
S085 0.000 0.000 0.880 1.307 3.385 2.514 2.84 0.164 0.756 0.354
S084 0.000 0.000 1.000 1.795 1.887 2.720 3.04 0.206 0.797 0.354
S083 0.000 0.500 1.000 2.247 4.753 4.815 5.16 0.214 0.110 0.354
S082 0.000 0.000 0.999 0.789 1.058 1.190 1.35 0.291 1.384 0.354
S081 0.000 0.000 0.964 1.177 2.558 2.437 2.48 0.166 0.825 0.707
S080 0.000 0.500 1.000 1.130 3.723 2.102 3.19 0.426 0.817 1.061
S079 0.000 0.500 1.000 1.320 2.051 2.116 4.95 0.253 0.836 0.000
S078 0.000 0.000 1.000 1.120 1.865 1.645 2.06 0.170 0.418 0.354
S077 0.000 0.000 1.000 1.327 1.885 1.707 2.35 0.360 0.862 0.354
S076 0.000 0.000 0.964 1.597 2.249 2.194 2.50 0.153 0.675 0.707
S075 0.000 0.000 1.000 1.848 3.184 3.326 4.55 0.218 0.858 0.354
S074 0.000 0.000 0.929 1.309 1.640 1.664 1.86 0.186 0.713 0.707
S073 0.000 0.000 0.964 1.626 1.868 1.930 2.48 0.190 0.666 0.000
S072 0.000 0.750 1.000 1.283 14.469 2.321 2.31 0.175 0.559 0.707
S071 0.000 0.000 1.000 1.652 2.506 2.479 2.67 0.185 0.834 0.354
S070 0.000 0.000 0.788 1.907 2.070 2.267 2.46 0.169 0.643 0.000
S069 0.000 0.500 0.952 3.627 1.232 1.318 6.02 0.192 0.509 0.707
S068 0.000 0.750 0.621 1.335 1.479 2.720 3.73 0.163 0.320 0.354
S067 0.000 0.000 0.910 1.005 1.141 1.345 1.46 0.186 0.545 0.707
S066 0.000 0.750 1.000 2.697 3.905 6.625 5.19 0.437 0.868 1.061
S065 0.000 0.000 0.964 1.282 1.868 2.154 2.85 0.226 0.675 0.000
S064 0.000 0.000 1.000 2.103 3.373 2.784 4.02 0.079 0.823 0.354
S063 0.000 0.000 0.891 1.521 1.713 2.049 2.05 0.188 0.676 0.354
S062 0.000 0.000 1.000 1.922 2.015 2.030 2.41 0.084 0.534 0.354
S061 0.000 0.500 1.000 0.817 6.877 1.362 1.87 0.145 0.509 0.000
S060 0.000 0.000 0.964 1.567 2.094 2.395 3.00 0.210 0.509 0.354
S059 0.000 0.000 0.997 2.304 2.234 2.914 2.42 0.255 0.907 0.707
S058 0.000 0.000 1.000 2.149 3.804 2.734 4.26 0.209 0.647 1.414
S057 0.000 0.000 1.000 3.019 2.526 2.324 3.80 0.167 0.724 0.354
S056 0.000 0.000 1.000 1.477 1.661 2.534 2.42 0.181 0.577 1.061
S055 0.000 0.000 0.964 1.084 1.428 1.200 1.91 0.237 0.715 0.000
S054 0.000 0.000 0.857 2.392 3.006 2.951 3.40 0.158 0.597 0.707
S053 0.000 0.000 1.000 1.324 2.183 1.636 2.44 0.117 0.571 0.000
S052 0.000 0.000 1.000 1.873 2.227 2.108 2.54 0.140 0.537 0.354
S051 0.000 0.000 1.000 2.072 2.736 2.599 3.64 0.157 0.199 0.354
S050 0.000 0.000 1.000 1.454 2.037 2.662 2.68 0.216 0.460 0.000
S049 0.000 0.500 0.929 2.178 4.859 3.562 5.11 0.207 0.528 0.000
S048 0.000 0.000 1.000 1.377 1.914 2.253 2.99 0.196 0.537 0.354
S047 0.000 0.750 0.779 4.321 1.188 0.905 3.75 0.184 0.089 0.000
S046 0.000 0.000 1.000 1.099 1.136 1.422 1.60 0.100 0.666 0.707
S045 0.000 0.000 0.893 2.371 2.143 2.942 3.05 0.258 0.606 0.354
S044 0.000 0.000 0.964 1.625 2.098 2.295 3.22 0.170 0.699 0.354
S043 0.000 0.000 0.964 1.304 1.868 1.949 2.29 0.219 0.398 0.354
S042 0.000 0.000 1.000 1.204 1.659 1.917 2.41 0.160 0.545 1.061
S041 0.000 0.000 1.000 1.455 1.949 1.693 2.06 0.181 0.732 0.354
S040 0.000 0.000 1.000 1.422 1.965 1.220 1.95 0.121 0.687 0.707
S039 0.000 0.000 1.000 2.092 3.134 2.945 2.90 0.190 0.656 0.707
S038 0.000 0.500 0.964 3.351 6.988 3.231 6.15 0.196 0.687 0.354
S037 0.000 0.500 1.000 0.770 4.167 2.955 1.45 0.104 0.398 0.354
S036 0.000 0.000 0.996 1.325 1.921 1.766 2.58 0.226 0.728 0.354
S035 0.000 0.000 0.993 2.358 2.258 2.601 2.81 0.182 0.837 0.354
S034 0.000 0.250 1.000 2.966 3.338 4.372 5.71 0.182 0.617 0.354
S033 0.000 0.000 1.000 0.692 0.890 1.091 1.23 0.088 0.288 0.000
S032 0.000 0.000 1.000 1.131 1.565 2.121 2.21 0.148 0.736 0.000
S031 0.000 0.000 0.964 1.509 1.611 1.729 1.97 0.224 0.830 0.354
S030 0.000 0.500 0.776 3.296 2.679 4.596 4.46 0.222 0.948 0.000
S029 0.000 0.500 0.913 1.302 6.534 3.338 3.83 0.167 1.146 0.000
S028 0.000 0.000 0.964 1.741 3.268 2.695 3.60 0.172 0.585 0.707
S027 0.000 0.000 1.000 1.369 1.777 2.340 4.06 0.161 0.675 0.000
S026 0.000 0.500 1.000 4.696 5.658 5.382 6.23 0.173 0.790 0.354
S025 0.000 0.500 1.000 2.561 2.680 5.594 4.80 0.317 0.748 0.000
S024 0.000 0.000 1.000 2.115 3.116 3.465 3.90 0.182 0.835 1.414
S023 0.000 0.000 1.000 1.548 1.875 2.191 2.89 0.277 0.674 0.000
S022 0.000 0.000 1.000 1.121 1.435 1.802 2.78 0.217 0.610 0.000
S021 0.000 0.750 1.000 4.232 3.404 6.555 7.42 0.484 0.819 1.061
S020 0.000 0.000 0.860 1.631 2.575 2.375 3.25 0.256 1.017 1.414
S019 0.000 0.000 0.893 1.980 2.789 3.059 5.71 0.205 0.744 0.354
S018 0.000 0.000 1.000 1.658 2.039 2.027 2.61 0.146 0.656 1.061
S017 0.000 0.000 0.964 1.299 1.681 1.911 2.06 0.201 0.937 0.354
S016 0.000 0.500 0.964 4.064 5.145 3.684 5.76 0.076 0.551 0.354
S015 0.000 0.000 1.000 1.045 1.275 1.418 2.60 0.292 0.713 0.354
S014 0.000 0.000 1.000 0.818 1.350 1.324 1.52 0.225 0.467 0.354
S013 0.000 0.000 1.000 1.071 1.270 1.418 2.42 0.093 0.710 0.000
S012 0.000 0.500 0.821 3.183 5.186 5.265 5.37 0.193 0.577 0.354
S011 0.000 0.500 1.000 1.871 2.802 2.380 3.38 0.155 0.814 0.354 0.667 1.188 3.28 3.53 0.313 0.130 0.769
S010 0.000 0.000 0.964 1.848 2.864 2.614 3.65 0.263 1.243 0.000 0.667 1.624 4.46 5.13 0.307 0.138 0.737
S009 0.000 0.000 1.000 1.347 2.064 2.250 2.51 0.296 0.635 0.354
S008 2.000 0.500 0.762 3.125 0.986 1.307 1.29 0.024 0.000 0.000
S007 0.000 0.000 0.929 1.095 1.324 1.792 2.00 0.211 0.702 0.707
S006 0.000 0.000 1.000 1.950 2.418 2.383 3.76 0.290 0.960 0.354 0.667 0.930 3.35 3.41 0.324 0.131 0.556
S005 0.000 0.000 1.000 1.828 2.952 2.998 3.55 0.163 0.577 0.707
S004 0.000 0.500 0.893 5.980 2.109 2.576 3.29 0.187 0.713 0.354
S003 2.000 0.000 0.857 0.955 1.375 2.160 1.96 0.216 0.847 0.000 0.613 0.719 1.59 3.71 0.102 0.098 0.819
S002 0.000 0.000 0.917 1.690 2.410 2.997 2.71 0.254 0.571 0.354 0.667 0.798 2.27 1.96 0.159 0.146 0.624
S001 0.000 0.750 0.728 1.912 2.523 4.542 7.27 0.179 0.659 0.707

# Inspection: select(sub[sub$Participant == "S008", ], starts_with("Item_PID"))
# Blank out (set to NA) all questionnaire responses of excluded
# participants, while keeping demographics and attention-check columns so
# the sample description below stays complete.
sub[
  sub$Participant %in% outliers_questionnaires,
  names(sub)[!names(sub) %in% c(
    "Participant", "Nationality", "Age",
    "Ethnicity", "Sex", "Student", "Education",
    "Interval", "AttentionCheck_Session1",
    "AttentionCheck_Session2"
  )]
] <- NA

Final Sample

We collected data from 181 participants.

# NOTE(review): all four filters reuse outliers_illusion1 ("S008"). This is
# consistent with the reported final N (181 - 1 = 180), so it reads as a
# deliberate full exclusion of S008 from every dataset — but confirm it is
# not a copy-paste slip, since outliers_illusion2 / outliers_perceptual
# also exist and are not used here.
illusion1 <- illusion1[!illusion1$Participant %in% outliers_illusion1, ]
illusion2 <- illusion2[!illusion2$Participant %in% outliers_illusion1, ]
perceptual <- perceptual[!perceptual$Participant %in% outliers_illusion1, ]
sub <- sub[!sub$Participant %in% outliers_illusion1, ]

The final sample included 180 participants (Mean age = 28.6, SD = 8.6, range: [20, 64]; Sex: 49.4% females, 50.6% males, 0.0% other; Education: High school, 33.89%; Bachelor, 45.56%; Master, 17.22%; Doctorate, 1.67%; Other, 1.67%), from which 2 (1.11%) completed session 2.

Country of Origin

# Choropleth of participants' countries of origin. right_join keeps every
# world polygon so the full map is drawn, with NA fill where no participant
# originated.
select(sub, region = Nationality) |>
  group_by(region) |>
  summarize(n = n()) |>
  right_join(map_data("world"), by = "region") |>
  ggplot(aes(long, lat, group = group)) +
  geom_polygon(aes(fill = n)) +
  scale_fill_gradientn(colors = c("#FFEB3B", "red")) +
  theme_void() +
  ggtitle("Number of participants by country of origin")

Age

# Age distribution (density normalized to [0, 1]) with the mean marked.
estimate_density(sub$Age) |>
  normalize(select = y) |> 
  ggplot(aes(x = x, y = y)) +
  geom_area(fill = "#607D8B") +
  geom_vline(xintercept = mean(sub$Age), color = "red") +
  # Label placed 10% to the right of the mean so it does not sit on the line.
  geom_label(data = data.frame(x = mean(sub$Age) * 1.1, y = 0.95), color = "red", label = paste0("Mean = ", format_value(mean(sub$Age)))) +
  scale_x_continuous(expand = c(0, 0)) +
  scale_y_continuous(expand = c(0, 0)) +
  labs(title = "Age", y = "Distribution", color = NULL) +
  theme_modern(axis.title.space = 10) +
  theme(
    plot.title = element_text(size = rel(1), face = "bold", hjust = 0.5),
    plot.subtitle = element_text(face = "italic", hjust = 0.5),
    axis.text.y = element_blank(),
    axis.text.x = element_text(size = rel(0.8)),
    axis.title.x = element_blank()
  )

Ethnicity

# Waffle chart of a categorical column: one square per participant,
# colored by group. `what` selects the column, `rows` controls the grid
# height, `size` the square size.
plot_waffle <- function(sub, what = "Nationality", title = what, rows = 8, size = 3) {
  waffle_data <- ggwaffle::waffle_iron(sub, what, rows = rows)

  ggplot(waffle_data, aes(x, y)) +
    geom_point(aes(color = group), shape = "square", size = size) +
    coord_equal() +
    ggtitle(title) +
    labs(fill = "", color = "") +
    theme_void() +
    theme(
      plot.title = element_text(face = "bold", hjust = 0.5),
      legend.key.height = unit(1, "mm"),
      legend.key.width = unit(1, "mm")
    )
}

# Waffle chart of ethnicity with a fixed color mapping per category.
plot_waffle(sub, "Ethnicity", rows = 10, size = 5) +
  scale_color_manual(values = c("Hispanic" = "#FF5722", "Caucasian" = "#2196F3", "African" = "#4CAF50", "Asian" = "#FFC107", "Other" = "#795548"))

Education

# Bar chart of education levels (factor order set during data loading).
sub |>
  ggplot(aes(x = Education)) +
  geom_bar(aes(fill = Education)) +
  scale_y_continuous(expand = c(0, 0), breaks= scales::pretty_breaks()) +
  scale_fill_viridis_d(guide = "none") +
  labs(title = "Education", y = "Number of Participants") +
  theme_modern(axis.title.space = 15) +
  theme(
    plot.title = element_text(size = rel(1), face = "bold", hjust = 0.5),
    plot.subtitle = element_text(face = "italic", hjust = 0.5),
    axis.text.y = element_text(size = rel(0.8)),
    axis.text.x = element_text(size = rel(0.8)),
    axis.title.x = element_blank()
  )

Save Preprocessed

# Persist the cleaned datasets for the downstream analysis scripts.
write.csv(illusion1, "../data/preprocessed_illusion1.csv", row.names = FALSE)
write.csv(illusion2, "../data/preprocessed_illusion2.csv", row.names = FALSE)
write.csv(perceptual, "../data/preprocessed_perceptual.csv", row.names = FALSE)
write.csv(sub, "../data/preprocessed_questionnaires.csv", row.names = FALSE)